In [2]:
import numpy as np
import pandas as pd
import keras
import tensorflow as tf
import os

from keras.models import Model
from keras.layers import Input, Conv2D, Flatten, MaxPooling2D, Dense, Dropout
from keras.optimizers import RMSprop
from keras.utils.np_utils import to_categorical 
from keras import backend as K

from sklearn.model_selection import train_test_split

os.environ["CUDA_VISIBLE_DEVICES"] = '1'
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

In [186]:
train_data = pd.read_csv('./facial-keypoints-detection/training.csv')

In [152]:
cols, rows = 96, 96
train_data.isnull().any()
#train_data.isnull().sum(axis=0)
#train_data = train_data.dropna()
#train_data.shape


Out[152]:
left_eye_center_x             True
left_eye_center_y             True
right_eye_center_x            True
right_eye_center_y            True
left_eye_inner_corner_x       True
left_eye_inner_corner_y       True
left_eye_outer_corner_x       True
left_eye_outer_corner_y       True
right_eye_inner_corner_x      True
right_eye_inner_corner_y      True
right_eye_outer_corner_x      True
right_eye_outer_corner_y      True
left_eyebrow_inner_end_x      True
left_eyebrow_inner_end_y      True
left_eyebrow_outer_end_x      True
left_eyebrow_outer_end_y      True
right_eyebrow_inner_end_x     True
right_eyebrow_inner_end_y     True
right_eyebrow_outer_end_x     True
right_eyebrow_outer_end_y     True
nose_tip_x                   False
nose_tip_y                   False
mouth_left_corner_x           True
mouth_left_corner_y           True
mouth_right_corner_x          True
mouth_right_corner_y          True
mouth_center_top_lip_x        True
mouth_center_top_lip_y        True
mouth_center_bottom_lip_x     True
mouth_center_bottom_lip_y     True
Image                        False
dtype: bool

In [70]:
images = [np.array(list(map(int,image.split(' ')))).reshape(cols,rows)/255 for image in train_data['Image'].values]

In [71]:
images = np.array(images)
images = images.reshape(images.shape[0],cols,rows,1)
X_train = images
#y_train = train_data[['nose_tip_x','nose_tip_y']].values
y_train = train_data.drop(['Image'],axis=1).fillna(np.nan).values
#y_trains = y_train.reshape(y_train.shape[0],2)
print("Input Shape : {0}".format(X_train.shape))
print("Output Shape : {0}".format(y_train.shape))


Input Shape : (7049, 96, 96, 1)
Output Shape : (7049, 30)

In [57]:
pos = np.where(np.isnan(y_trains[:,1])==False)
y_trains[pos,1].transpose().shape
X_train[pos].shape


Out[57]:
(7039, 96, 96, 1)

Deep learning

With Improved Loss Function


In [106]:
def face_loss(y_true, y_pred):
#     y = tf.constant([[1., np.nan], [np.nan, 2]])
#     x = tf.constant([[1.1, 1], [0, 2.1]])
    #_zeros = tf.zeros(y_pred.get_shape())
    _zeros = tf.multiply(tf.constant(0.0),y_pred)
    _y_true = tf.where(tf.is_nan(y_true),y_pred, y_true)
    #_loss = tf.where(tf.is_nan(y_true),_zeros, tf.square(y_pred - y_true))
    #return tf.reduce_mean(_loss, axis=-1)
    return K.mean(K.square(y_pred - _y_true), axis=-1)

y = tf.constant([[1., np.nan], [np.nan, 2]])
x = tf.constant([[1.1, 1], [0, 2.1]])
with tf.Session() as sess:
    print(sess.run(face_loss(y,x)))


[0.005      0.00499999]

In [145]:
def build_model(input_shape, output_shape):
    input_tensor = Input(shape=input_shape)
    out = Conv2D(filters = 64, kernel_size = (5,5),padding = 'Same', activation ='relu')(input_tensor)
    out = Conv2D(filters = 64, kernel_size = (5,5),padding = 'Same', activation ='relu')(out)
    out = MaxPooling2D(pool_size=(2,2))(out)
    out = Dropout(0.25)(out)
    
    out = Conv2D(filters = 64, kernel_size = (3,3),padding = 'Same', activation ='relu')(out)
    out = Conv2D(filters = 64, kernel_size = (3,3),padding = 'Same', activation ='linear')(out)
    out = MaxPooling2D(pool_size=(2,2), strides=(2,2))(out)
    out = Dropout(0.25)(out)

    out = Flatten()(out)
    out = Dense(256, activation = "linear")(out)
    out = Dropout(0.5)(out)
#     out = Dense(128, activation = "linear")(out)
#     out = Dropout(0.25)(out)
    output_tensor = Dense(output_shape, activation = "linear")(out)
    return input_tensor, output_tensor

In [146]:
input_shape = (cols, rows, 1)
output_shape = 30
#nb_features = 30
input_tensor, output_tensor = build_model(input_shape, output_shape)

#models = [Model(inputs=input_tensor, outputs=build_recognize(input_tensor, output_shape)) for i in range(nb_features)]    
model = Model(inputs=input_tensor, outputs=output_tensor)
model.compile(
        optimizer = RMSprop(lr=0.0005, rho=0.9, epsilon=1e-08, decay=0.0),
        #loss = "categorical_crossentropy", 
        loss=face_loss,
        metrics=["accuracy"]
    )
# for i in range(nb_features):
#     models[i].compile(
#         optimizer = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0),
#         #loss = "categorical_crossentropy", 
#         loss=face_loss,
#         metrics=["accuracy"]
#     )

In [147]:
random_seed = 2
_X_train, _X_val, _y_train, _y_val = train_test_split(X_train, y_train, test_size = 0.1, random_state=random_seed)
print("Train case {0}. Val case: {1}".format(len(_X_train),len(_X_val)))


Train case 6344. Val case: 705

In [148]:
history = model.fit(_X_train, _y_train, epochs=120, batch_size=128, validation_data=(_X_val, _y_val), verbose=1)


Train on 6344 samples, validate on 705 samples
Epoch 1/120
6344/6344 [==============================] - 7s 1ms/step - loss: 381.7233 - acc: 0.2295 - val_loss: 121.6854 - val_acc: 0.0823
Epoch 2/120
6344/6344 [==============================] - 6s 927us/step - loss: 170.2845 - acc: 0.2201 - val_loss: 46.8201 - val_acc: 0.8908
Epoch 3/120
6344/6344 [==============================] - 6s 913us/step - loss: 118.0882 - acc: 0.2314 - val_loss: 30.1111 - val_acc: 0.3461
Epoch 4/120
6344/6344 [==============================] - 6s 923us/step - loss: 86.6923 - acc: 0.2377 - val_loss: 14.1505 - val_acc: 0.8908
Epoch 5/120
6344/6344 [==============================] - 6s 917us/step - loss: 62.3805 - acc: 0.2527 - val_loss: 9.2680 - val_acc: 0.1106
Epoch 6/120
6344/6344 [==============================] - 6s 920us/step - loss: 47.2442 - acc: 0.2718 - val_loss: 10.8417 - val_acc: 0.8908
Epoch 7/120
6344/6344 [==============================] - 6s 920us/step - loss: 39.2658 - acc: 0.2765 - val_loss: 8.7961 - val_acc: 0.8610
Epoch 8/120
6344/6344 [==============================] - 6s 909us/step - loss: 34.4544 - acc: 0.2869 - val_loss: 9.2649 - val_acc: 0.8865
Epoch 9/120
6344/6344 [==============================] - 6s 910us/step - loss: 30.9309 - acc: 0.3058 - val_loss: 16.9532 - val_acc: 0.0823
Epoch 10/120
6344/6344 [==============================] - 6s 909us/step - loss: 29.8680 - acc: 0.2875 - val_loss: 8.0764 - val_acc: 0.8738
Epoch 11/120
6344/6344 [==============================] - 6s 907us/step - loss: 28.0141 - acc: 0.2897 - val_loss: 15.4916 - val_acc: 0.8057
Epoch 12/120
6344/6344 [==============================] - 6s 903us/step - loss: 26.0988 - acc: 0.3105 - val_loss: 8.0755 - val_acc: 0.8213
Epoch 13/120
6344/6344 [==============================] - 6s 899us/step - loss: 24.7419 - acc: 0.3138 - val_loss: 12.9935 - val_acc: 0.7773
Epoch 14/120
6344/6344 [==============================] - 6s 899us/step - loss: 23.9479 - acc: 0.3157 - val_loss: 7.4825 - val_acc: 0.0908
Epoch 15/120
6344/6344 [==============================] - 6s 910us/step - loss: 22.5355 - acc: 0.3138 - val_loss: 7.8716 - val_acc: 0.0894
Epoch 16/120
6344/6344 [==============================] - 6s 910us/step - loss: 22.1453 - acc: 0.3052 - val_loss: 10.1645 - val_acc: 0.1730
Epoch 17/120
6344/6344 [==============================] - 6s 906us/step - loss: 20.4540 - acc: 0.3380 - val_loss: 7.4580 - val_acc: 0.7957
Epoch 18/120
6344/6344 [==============================] - 6s 919us/step - loss: 20.7653 - acc: 0.3370 - val_loss: 6.5774 - val_acc: 0.4298
Epoch 19/120
6344/6344 [==============================] - 6s 923us/step - loss: 19.2485 - acc: 0.3294 - val_loss: 7.0780 - val_acc: 0.2922
Epoch 20/120
6344/6344 [==============================] - 6s 915us/step - loss: 18.5875 - acc: 0.3482 - val_loss: 6.3312 - val_acc: 0.6610
Epoch 21/120
6344/6344 [==============================] - 6s 909us/step - loss: 18.4447 - acc: 0.3378 - val_loss: 6.1805 - val_acc: 0.4823
Epoch 22/120
6344/6344 [==============================] - 6s 915us/step - loss: 18.0157 - acc: 0.3414 - val_loss: 6.4068 - val_acc: 0.3645
Epoch 23/120
6344/6344 [==============================] - 6s 916us/step - loss: 17.2558 - acc: 0.3608 - val_loss: 8.1411 - val_acc: 0.4908
Epoch 24/120
6344/6344 [==============================] - 6s 914us/step - loss: 17.2579 - acc: 0.3618 - val_loss: 11.6138 - val_acc: 0.5050
Epoch 25/120
6344/6344 [==============================] - 6s 916us/step - loss: 16.0239 - acc: 0.3715 - val_loss: 14.3590 - val_acc: 0.4411
Epoch 26/120
6344/6344 [==============================] - 6s 915us/step - loss: 16.3691 - acc: 0.3770 - val_loss: 5.7201 - val_acc: 0.4624
Epoch 27/120
6344/6344 [==============================] - 6s 915us/step - loss: 15.7837 - acc: 0.3878 - val_loss: 6.1059 - val_acc: 0.5135
Epoch 28/120
6344/6344 [==============================] - 6s 921us/step - loss: 14.8056 - acc: 0.3689 - val_loss: 4.6863 - val_acc: 0.5830
Epoch 29/120
6344/6344 [==============================] - 6s 917us/step - loss: 15.7275 - acc: 0.3920 - val_loss: 7.7256 - val_acc: 0.4284
Epoch 30/120
6344/6344 [==============================] - 6s 917us/step - loss: 14.9038 - acc: 0.4078 - val_loss: 4.5130 - val_acc: 0.4624
Epoch 31/120
6344/6344 [==============================] - 6s 914us/step - loss: 14.9085 - acc: 0.4072 - val_loss: 4.7827 - val_acc: 0.4610
Epoch 32/120
6344/6344 [==============================] - 6s 914us/step - loss: 14.5021 - acc: 0.4256 - val_loss: 7.1354 - val_acc: 0.4965
Epoch 33/120
6344/6344 [==============================] - 6s 917us/step - loss: 14.5473 - acc: 0.4113 - val_loss: 8.3944 - val_acc: 0.4681
Epoch 34/120
6344/6344 [==============================] - 6s 918us/step - loss: 14.2403 - acc: 0.4256 - val_loss: 4.5391 - val_acc: 0.4326
Epoch 35/120
6344/6344 [==============================] - 6s 913us/step - loss: 13.6863 - acc: 0.4128 - val_loss: 4.4721 - val_acc: 0.5702
Epoch 36/120
6344/6344 [==============================] - 6s 908us/step - loss: 14.2384 - acc: 0.4250 - val_loss: 7.5420 - val_acc: 0.6000
Epoch 37/120
6344/6344 [==============================] - 6s 911us/step - loss: 13.7485 - acc: 0.4376 - val_loss: 5.7223 - val_acc: 0.6752
Epoch 38/120
6344/6344 [==============================] - 6s 910us/step - loss: 13.5076 - acc: 0.4497 - val_loss: 5.7865 - val_acc: 0.3475
Epoch 39/120
6344/6344 [==============================] - 6s 914us/step - loss: 13.5415 - acc: 0.4423 - val_loss: 7.7463 - val_acc: 0.5858
Epoch 40/120
6344/6344 [==============================] - 6s 910us/step - loss: 13.3905 - acc: 0.4393 - val_loss: 5.7361 - val_acc: 0.5702
Epoch 41/120
6344/6344 [==============================] - 6s 908us/step - loss: 12.9799 - acc: 0.4448 - val_loss: 7.0290 - val_acc: 0.6170
Epoch 42/120
6344/6344 [==============================] - 6s 907us/step - loss: 13.3299 - acc: 0.4551 - val_loss: 4.1180 - val_acc: 0.5461
Epoch 43/120
6344/6344 [==============================] - 6s 914us/step - loss: 12.7764 - acc: 0.4574 - val_loss: 8.8296 - val_acc: 0.5645
Epoch 44/120
6344/6344 [==============================] - 6s 909us/step - loss: 13.8370 - acc: 0.4781 - val_loss: 4.4534 - val_acc: 0.6397
Epoch 45/120
6344/6344 [==============================] - 6s 901us/step - loss: 12.4702 - acc: 0.4447 - val_loss: 4.7741 - val_acc: 0.6000
Epoch 46/120
6344/6344 [==============================] - 6s 915us/step - loss: 13.1777 - acc: 0.4748 - val_loss: 6.6263 - val_acc: 0.5447
Epoch 47/120
6344/6344 [==============================] - 6s 908us/step - loss: 12.6800 - acc: 0.4466 - val_loss: 4.7965 - val_acc: 0.6525
Epoch 48/120
6344/6344 [==============================] - 6s 912us/step - loss: 12.5268 - acc: 0.4853 - val_loss: 4.2139 - val_acc: 0.6383
Epoch 49/120
6344/6344 [==============================] - 6s 911us/step - loss: 12.5022 - acc: 0.4551 - val_loss: 7.5144 - val_acc: 0.6028
Epoch 50/120
6344/6344 [==============================] - 6s 907us/step - loss: 12.9633 - acc: 0.4871 - val_loss: 7.7497 - val_acc: 0.5291
Epoch 51/120
6344/6344 [==============================] - 6s 907us/step - loss: 11.9619 - acc: 0.4669 - val_loss: 12.3927 - val_acc: 0.6199
Epoch 52/120
6344/6344 [==============================] - 6s 914us/step - loss: 12.3785 - acc: 0.4800 - val_loss: 4.1217 - val_acc: 0.6610
Epoch 53/120
6344/6344 [==============================] - 6s 910us/step - loss: 12.7347 - acc: 0.5052 - val_loss: 4.1795 - val_acc: 0.5518
Epoch 54/120
6344/6344 [==============================] - 6s 906us/step - loss: 12.3626 - acc: 0.4737 - val_loss: 4.1383 - val_acc: 0.5021
Epoch 55/120
6344/6344 [==============================] - 6s 903us/step - loss: 12.2400 - acc: 0.4538 - val_loss: 4.2812 - val_acc: 0.5759
Epoch 56/120
6344/6344 [==============================] - 6s 912us/step - loss: 12.3586 - acc: 0.5199 - val_loss: 6.2243 - val_acc: 0.6113
Epoch 57/120
6344/6344 [==============================] - 6s 908us/step - loss: 12.2987 - acc: 0.4735 - val_loss: 4.0231 - val_acc: 0.5589
Epoch 58/120
6344/6344 [==============================] - 6s 904us/step - loss: 12.1999 - acc: 0.4968 - val_loss: 6.9061 - val_acc: 0.5745
Epoch 59/120
6344/6344 [==============================] - 6s 909us/step - loss: 11.9502 - acc: 0.4652 - val_loss: 4.0663 - val_acc: 0.5901
Epoch 60/120
6344/6344 [==============================] - 6s 904us/step - loss: 11.9365 - acc: 0.4929 - val_loss: 6.1059 - val_acc: 0.5504
Epoch 61/120
6344/6344 [==============================] - 6s 909us/step - loss: 12.0005 - acc: 0.4849 - val_loss: 11.7529 - val_acc: 0.6426
Epoch 62/120
6344/6344 [==============================] - 6s 907us/step - loss: 12.0332 - acc: 0.4860 - val_loss: 4.4076 - val_acc: 0.6255
Epoch 63/120
6344/6344 [==============================] - 6s 908us/step - loss: 11.7565 - acc: 0.4767 - val_loss: 8.7443 - val_acc: 0.4610
Epoch 64/120
6344/6344 [==============================] - 6s 921us/step - loss: 11.9660 - acc: 0.4929 - val_loss: 10.0232 - val_acc: 0.6241
Epoch 65/120
6344/6344 [==============================] - 6s 913us/step - loss: 11.3629 - acc: 0.5115 - val_loss: 5.3967 - val_acc: 0.5319
Epoch 66/120
6344/6344 [==============================] - 6s 912us/step - loss: 12.0093 - acc: 0.4768 - val_loss: 6.2543 - val_acc: 0.6340
Epoch 67/120
6344/6344 [==============================] - 6s 917us/step - loss: 11.7373 - acc: 0.4934 - val_loss: 4.5164 - val_acc: 0.5532
Epoch 68/120
6344/6344 [==============================] - 6s 916us/step - loss: 11.2901 - acc: 0.5063 - val_loss: 4.6379 - val_acc: 0.5773
Epoch 69/120
6344/6344 [==============================] - 6s 916us/step - loss: 11.5418 - acc: 0.4727 - val_loss: 4.0963 - val_acc: 0.5716
Epoch 70/120
6344/6344 [==============================] - 6s 916us/step - loss: 11.6278 - acc: 0.4582 - val_loss: 5.1735 - val_acc: 0.5121
Epoch 71/120
6344/6344 [==============================] - 6s 917us/step - loss: 11.6695 - acc: 0.4882 - val_loss: 10.6132 - val_acc: 0.5418
Epoch 72/120
6344/6344 [==============================] - 6s 915us/step - loss: 11.0588 - acc: 0.5047 - val_loss: 9.6274 - val_acc: 0.7021
Epoch 73/120
6344/6344 [==============================] - 6s 910us/step - loss: 11.5882 - acc: 0.4902 - val_loss: 15.8601 - val_acc: 0.5319
Epoch 74/120
6344/6344 [==============================] - 6s 912us/step - loss: 11.4642 - acc: 0.4932 - val_loss: 5.0148 - val_acc: 0.5674
Epoch 75/120
6344/6344 [==============================] - 6s 906us/step - loss: 11.6828 - acc: 0.4964 - val_loss: 4.1309 - val_acc: 0.4624
Epoch 76/120
6344/6344 [==============================] - 6s 908us/step - loss: 11.3820 - acc: 0.5005 - val_loss: 4.0821 - val_acc: 0.6000
Epoch 77/120
6344/6344 [==============================] - 6s 906us/step - loss: 11.2288 - acc: 0.4899 - val_loss: 6.6267 - val_acc: 0.6397
Epoch 78/120
6344/6344 [==============================] - 6s 906us/step - loss: 11.1363 - acc: 0.5140 - val_loss: 5.7760 - val_acc: 0.5277
Epoch 79/120
6344/6344 [==============================] - 6s 906us/step - loss: 11.5675 - acc: 0.4844 - val_loss: 5.5692 - val_acc: 0.5035
Epoch 80/120
6344/6344 [==============================] - 6s 913us/step - loss: 11.3014 - acc: 0.4891 - val_loss: 4.0131 - val_acc: 0.5333
Epoch 81/120
6344/6344 [==============================] - 6s 917us/step - loss: 11.1792 - acc: 0.5000 - val_loss: 12.4940 - val_acc: 0.5532
Epoch 82/120
6344/6344 [==============================] - 6s 921us/step - loss: 11.1451 - acc: 0.5046 - val_loss: 8.2629 - val_acc: 0.5475
Epoch 83/120
6344/6344 [==============================] - 6s 918us/step - loss: 10.8922 - acc: 0.4909 - val_loss: 4.0307 - val_acc: 0.4965
Epoch 84/120
6344/6344 [==============================] - 6s 917us/step - loss: 11.1430 - acc: 0.4905 - val_loss: 4.0272 - val_acc: 0.6028
Epoch 85/120
6344/6344 [==============================] - 6s 909us/step - loss: 11.3455 - acc: 0.4745 - val_loss: 3.9341 - val_acc: 0.5518
Epoch 86/120
6344/6344 [==============================] - 6s 908us/step - loss: 10.7886 - acc: 0.4849 - val_loss: 4.3644 - val_acc: 0.6567
Epoch 87/120
6344/6344 [==============================] - 6s 923us/step - loss: 11.2115 - acc: 0.5181 - val_loss: 5.0144 - val_acc: 0.6326
Epoch 88/120
6344/6344 [==============================] - 6s 919us/step - loss: 11.0081 - acc: 0.5206 - val_loss: 7.4006 - val_acc: 0.7234
Epoch 89/120
6344/6344 [==============================] - 6s 912us/step - loss: 10.9474 - acc: 0.5197 - val_loss: 4.5365 - val_acc: 0.5390
Epoch 90/120
6344/6344 [==============================] - 6s 905us/step - loss: 10.5942 - acc: 0.5112 - val_loss: 4.7910 - val_acc: 0.5972
Epoch 91/120
6344/6344 [==============================] - 6s 907us/step - loss: 10.6492 - acc: 0.5173 - val_loss: 8.8603 - val_acc: 0.5560
Epoch 92/120
6344/6344 [==============================] - 6s 903us/step - loss: 10.7411 - acc: 0.4950 - val_loss: 7.6172 - val_acc: 0.5518
Epoch 93/120
6344/6344 [==============================] - 6s 907us/step - loss: 11.0100 - acc: 0.4658 - val_loss: 4.6125 - val_acc: 0.6170
Epoch 94/120
6344/6344 [==============================] - 6s 905us/step - loss: 10.9938 - acc: 0.4562 - val_loss: 4.2124 - val_acc: 0.5191
Epoch 95/120
6344/6344 [==============================] - 6s 911us/step - loss: 10.9835 - acc: 0.4980 - val_loss: 4.2698 - val_acc: 0.5305
Epoch 96/120
6344/6344 [==============================] - 6s 907us/step - loss: 10.6927 - acc: 0.4994 - val_loss: 3.8978 - val_acc: 0.5319
Epoch 97/120
6344/6344 [==============================] - 6s 912us/step - loss: 10.5070 - acc: 0.4959 - val_loss: 12.2560 - val_acc: 0.7064
Epoch 98/120
6344/6344 [==============================] - 6s 915us/step - loss: 10.8634 - acc: 0.4909 - val_loss: 4.1811 - val_acc: 0.5716
Epoch 99/120
6344/6344 [==============================] - 6s 910us/step - loss: 10.8308 - acc: 0.4962 - val_loss: 4.8415 - val_acc: 0.4993
Epoch 100/120
6344/6344 [==============================] - 6s 907us/step - loss: 10.7310 - acc: 0.4946 - val_loss: 5.1193 - val_acc: 0.3305
Epoch 101/120
6344/6344 [==============================] - 6s 908us/step - loss: 11.0467 - acc: 0.4726 - val_loss: 5.8340 - val_acc: 0.5050
Epoch 102/120
6344/6344 [==============================] - 6s 903us/step - loss: 10.6197 - acc: 0.5006 - val_loss: 6.1647 - val_acc: 0.6099
Epoch 103/120
6344/6344 [==============================] - 6s 907us/step - loss: 10.7943 - acc: 0.5014 - val_loss: 6.3727 - val_acc: 0.5362
Epoch 104/120
6344/6344 [==============================] - 6s 902us/step - loss: 10.3942 - acc: 0.4827 - val_loss: 4.2794 - val_acc: 0.6142
Epoch 105/120
6344/6344 [==============================] - 6s 916us/step - loss: 10.5244 - acc: 0.5159 - val_loss: 4.8596 - val_acc: 0.5631
Epoch 106/120
6344/6344 [==============================] - 6s 919us/step - loss: 10.4554 - acc: 0.4991 - val_loss: 3.8970 - val_acc: 0.5191
Epoch 107/120
6344/6344 [==============================] - 6s 915us/step - loss: 10.7262 - acc: 0.4626 - val_loss: 4.8438 - val_acc: 0.5305
Epoch 108/120
6344/6344 [==============================] - 6s 912us/step - loss: 10.3650 - acc: 0.4997 - val_loss: 4.5323 - val_acc: 0.5319
Epoch 109/120
6344/6344 [==============================] - 6s 908us/step - loss: 10.7585 - acc: 0.5299 - val_loss: 9.0029 - val_acc: 0.5645
Epoch 110/120
6344/6344 [==============================] - 6s 905us/step - loss: 10.4742 - acc: 0.4984 - val_loss: 4.5870 - val_acc: 0.6255
Epoch 111/120
6344/6344 [==============================] - 6s 916us/step - loss: 10.4339 - acc: 0.4880 - val_loss: 4.2394 - val_acc: 0.5277
Epoch 112/120
6344/6344 [==============================] - 6s 912us/step - loss: 10.5576 - acc: 0.5210 - val_loss: 5.3877 - val_acc: 0.6411
Epoch 113/120
6344/6344 [==============================] - 6s 915us/step - loss: 10.4009 - acc: 0.5238 - val_loss: 3.7979 - val_acc: 0.5787
Epoch 114/120
6344/6344 [==============================] - 6s 914us/step - loss: 10.3862 - acc: 0.5110 - val_loss: 4.0092 - val_acc: 0.5858
Epoch 115/120
6344/6344 [==============================] - 6s 915us/step - loss: 10.3136 - acc: 0.4951 - val_loss: 4.8356 - val_acc: 0.6184
Epoch 116/120
6344/6344 [==============================] - 6s 925us/step - loss: 10.0475 - acc: 0.5292 - val_loss: 6.7605 - val_acc: 0.5844
Epoch 117/120
6344/6344 [==============================] - 6s 912us/step - loss: 10.3480 - acc: 0.5085 - val_loss: 6.9353 - val_acc: 0.5816
Epoch 118/120
6344/6344 [==============================] - 6s 904us/step - loss: 10.6677 - acc: 0.5032 - val_loss: 8.1323 - val_acc: 0.6511
Epoch 119/120
6344/6344 [==============================] - 6s 908us/step - loss: 10.3957 - acc: 0.4962 - val_loss: 3.9266 - val_acc: 0.4950
Epoch 120/120
6344/6344 [==============================] - 6s 910us/step - loss: 10.3842 - acc: 0.4924 - val_loss: 4.2054 - val_acc: 0.5546

Plot Result


In [149]:
import matplotlib.pyplot as plt
%matplotlib inline
index = 1
# image = train_data['Image'].values[index]
# image = np.array(list(map(int,image.split(' ')))).reshape(cols,rows)
image = images[index]
y_trues = y_trains[index,:]
#y_preds = np.array([models[i].predict(X_train[index:index+1])[0] for i in range(nb_features)])
y_preds = model.predict(X_train[index:index+1])[0]
y_trues = y_trues.reshape(15,2)
y_preds = y_preds.reshape(15,2)
plt.imshow(image.reshape(cols,rows),plt.cm.gray)
plt.scatter(y_trues[:,0],y_trues[:,1],c='b')
plt.scatter(y_preds[:,0],y_preds[:,1],c='r')


Out[149]:
<matplotlib.collections.PathCollection at 0x7fef752d8a58>

In [150]:
import matplotlib.pyplot as plt
import random
%matplotlib inline

cols_subplot, rows_subplot = 5,5
plt.figure(1,figsize=(20,20))
samples = random.sample(range(len(images)),cols_subplot*rows_subplot)
for index in range(1,cols_subplot*rows_subplot+1):
    plt.subplot(cols_subplot,rows_subplot,index)
    #index = 1
    image = images[index]
    y_trues = y_trains[index,:]
    #y_preds = np.array([models[i].predict(X_train[index:index+1])[0] for i in range(nb_features)])
    y_preds = model.predict(X_train[index:index+1])[0]
    y_trues = y_trues.reshape(15,2)
    y_preds = y_preds.reshape(15,2)
    plt.imshow(image.reshape(cols,rows),plt.cm.gray)
    plt.scatter(y_trues[:,0],y_trues[:,1],c='b')
    plt.scatter(y_preds[:,0],y_preds[:,1],c='r')



In [151]:
import matplotlib.pyplot as plt
import random
%matplotlib inline

cols_subplot, rows_subplot = 5,5
plt.figure(figsize=(20,20))
index_samples = random.sample(range(len(images)),cols_subplot*rows_subplot)
for i in range(1,cols_subplot*rows_subplot+1):
    plt.subplot(cols_subplot,rows_subplot,i)
    #index = 1
    index = index_samples[i-1]
    image = images[index]
    y_trues = y_train[index,:]
    #y_preds = np.array([models[i].predict(X_train[index:index+1])[0] for i in range(nb_features)])
    y_preds = model.predict(X_train[index:index+1])[0]
    y_trues = y_trues.reshape(15,2)
    y_preds = y_preds.reshape(15,2)
    plt.imshow(image.reshape(cols,rows),plt.cm.gray)
    plt.scatter(y_trues[:,0],y_trues[:,1],c='b')
    plt.scatter(y_preds[:,0],y_preds[:,1],c='r')


Explore Tensorflow


In [100]:
y = tf.constant([[1., np.nan], [np.nan, 2]])
x = tf.constant([[1.1, 1], [0, 2.1]])
#_zeros = tf.zeros(x.get_shape())
_zeros = tf.multiply(tf.constant(0.0),x)
_loss = tf.where(tf.is_nan(y),_zeros, tf.square(x-y))
with tf.Session() as sess:
    print(sess.run(_loss))


[[0.01       0.        ]
 [0.         0.00999998]]

Predict test


In [173]:
test_data = pd.read_csv('./facial-keypoints-detection/test.csv')
result = pd.read_csv('./facial-keypoints-detection/SampleSubmission.csv')
#result['Location'] = y_pred.astype(int)
ids = pd.read_csv('./facial-keypoints-detection/IdLookupTable.csv')

In [157]:
test_images = [np.array(list(map(int,image.split(' ')))).reshape(cols,rows)/255 for image in test_data['Image'].values]

In [161]:
test_images = np.array(test_images)
test_images = test_images.reshape(test_images.shape[0],cols,rows,1)
X_test = test_images
X_test.shape


Out[161]:
(1783, 96, 96, 1)

In [162]:
y_preds = model.predict(X_test)

In [168]:
y_preds


Out[168]:
array([[66.95357 , 37.724968, 29.685965, ..., 70.33713 , 47.799217,
        82.10255 ],
       [68.20598 , 37.42697 , 29.025938, ..., 75.022194, 49.621384,
        85.25677 ],
       [67.908676, 38.226852, 31.260002, ..., 73.38641 , 50.087807,
        82.77596 ],
       ...,
       [68.26083 , 41.821327, 32.884735, ..., 76.801765, 47.32155 ,
        80.24187 ],
       [64.13208 , 37.761017, 29.656454, ..., 71.15032 , 45.988907,
        75.80935 ],
       [68.53035 , 41.406242, 33.077477, ..., 78.12774 , 49.730133,
        80.88666 ]], dtype=float32)

In [191]:
df_y_preds = pd.DataFrame(columns=train_data.drop(['Image'],axis=1).keys(), data=y_preds)

In [184]:
ids.iloc[0]['ImageId']


Out[184]:
1

In [202]:
for i in range(ids.values.shape[0]):
    image_index = ids.iloc[i]['ImageId'] - 1
    feature_name = ids.iloc[i]['FeatureName']
    value = df_y_preds.iloc[image_index][feature_name]
    result.iloc[i]['Location'] = value

In [203]:
result


Out[203]:
RowId Location
0 1 66
1 2 37
2 3 29
3 4 37
4 5 60
5 6 38
6 7 74
7 8 39
8 9 36
9 10 38
10 11 22
11 12 38
12 13 57
13 14 28
14 15 80
15 16 30
16 17 39
17 18 28
18 19 16
19 20 29
20 21 48
21 22 56
22 23 62
23 24 76
24 25 32
25 26 76
26 27 47
27 28 70
28 29 47
29 30 82
... ... ...
27094 27095 31
27095 27096 42
27096 27097 51
27097 27098 66
27098 27099 53
27099 27100 77
27100 27101 68
27101 27102 41
27102 27103 32
27103 27104 38
27104 27105 48
27105 27106 66
27106 27107 47
27107 27108 80
27108 27109 64
27109 27110 37
27110 27111 29
27111 27112 37
27112 27113 45
27113 27114 60
27114 27115 45
27115 27116 75
27116 27117 68
27117 27118 41
27118 27119 33
27119 27120 40
27120 27121 49
27121 27122 67
27122 27123 49
27123 27124 80

27124 rows × 2 columns


In [204]:
result.to_csv('./facial-keypoints-detection/submission.csv',index=False)

In [ ]: